library(tidyverse)
## ── Attaching packages ───────────────────────────────────────────────────────────── tidyverse 1.2.1 ──
## ✓ ggplot2 3.2.1 ✓ purrr 0.3.4
## ✓ tibble 3.0.1 ✓ dplyr 1.0.0
## ✓ tidyr 0.8.3 ✓ stringr 1.4.0
## ✓ readr 1.3.1 ✓ forcats 0.4.0
## Warning: package 'tibble' was built under R version 3.6.2
## Warning: package 'purrr' was built under R version 3.6.2
## Warning: package 'dplyr' was built under R version 3.6.2
## ── Conflicts ──────────────────────────────────────────────────────────────── tidyverse_conflicts() ──
## x dplyr::filter() masks stats::filter()
## x dplyr::lag() masks stats::lag()
library(readr)
library(TSstudio)
library(xts)
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
##
## Attaching package: 'xts'
## The following objects are masked from 'package:dplyr':
##
## first, last
library(quantmod)
## Loading required package: TTR
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
## Version 0.4-0 included new data defaults. See ?getSymbols.
library(forecast)
## Warning: package 'forecast' was built under R version 3.6.2
library(tis)
##
## Attaching package: 'tis'
## The following object is masked from 'package:forecast':
##
## easter
## The following object is masked from 'package:quantmod':
##
## Lag
## The following object is masked from 'package:TTR':
##
## lags
## The following object is masked from 'package:dplyr':
##
## between
library(gridExtra)
##
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
##
## combine
library(timeDate)
##
## Attaching package: 'timeDate'
## The following objects are masked from 'package:tis':
##
## dayOfWeek, dayOfYear, isHoliday
library(ggplot2)
library(urca)
library(alr3)
## Loading required package: car
## Loading required package: carData
##
## Attaching package: 'car'
## The following object is masked from 'package:dplyr':
##
## recode
## The following object is masked from 'package:purrr':
##
## some
library(ggcorrplot)
library(tidyr)
library(dplyr)
library(dygraphs)
# Provides data frame 'full' with UMCSENT series and regressors
source(url('https://raw.githubusercontent.com/andrewwinn3/stat626/master/Data/Setup.R'))
## Warning: package 'astsa' was built under R version 3.6.2
##
## Attaching package: 'astsa'
## The following object is masked from 'package:forecast':
##
## gas
# Attach the training data frame so its columns (UMCSENT, SP, UNRATE, WTI and
# the derived *.stab / *.lag* / *.lead* series built by Setup.R) are visible
# by bare name in the code below.
# NOTE(review): attach() is an anti-pattern (name masking, ambiguity); the
# rest of this script depends on it, so it is kept here — prefer passing data
# explicitly (data =, with(), df$col) in new code.
attach(full.train)
# Locally sourced helper functions (e.g. autoplot_roots used below) —
# hard-coded ~/Downloads paths; TODO confirm these files are available.
source('~/Downloads/FM_Functions.R')
source('~/Downloads/ARMAroots_RFunctions.R')
# Read the University of Michigan consumer sentiment (UMCSENT) series.
umcsent <- read_csv(file = '~/Downloads/UMCSENT-2.csv')
## Parsed with column specification:
## cols(
## DATE = col_date(format = ""),
## UMCSENT = col_double()
## )
# Date/value data frame of the raw series.
# BUG FIX: the original used `DATE <- ...` inside the data.frame() call,
# which assigns DATE in the global environment as a side effect and produces
# a mangled column name; `=` names the column properly.
umcsent.df <- data.frame(
  DATE = as.Date(umcsent$DATE),
  UMSCENT = as.numeric(umcsent$UMCSENT))
# Plain vectors used later in the script. NOTE: "UMSCENT" is a typo for
# "UMCSENT", kept deliberately because downstream code (cbind(UMSCENT, ...),
# the VAR model) references this exact name.
UMSCENT <- as.numeric(umcsent$UMCSENT)
DATE <- as.Date(umcsent$DATE)
# Read the WTI spot crude oil price series.
WTI <- read_csv(file = '~/Downloads/WTISPLC.csv')
## Parsed with column specification:
## cols(
## DATE = col_date(format = ""),
## WTISPLC = col_double()
## )
# BUG FIX: `WTI.date <- ...` inside the data.frame() call leaked a global
# variable and mangled the column name; `=` names the column properly.
WTI.df <- data.frame(
  WTI.date = as.Date(WTI$DATE),
  WTI = as.numeric(WTI$WTISPLC))
# Drop the first observation to align with the differenced series.
# NOTE: the second assignment deliberately overwrites the WTI tibble with a
# plain numeric vector, which is what the models below expect.
WTI.date <- as.Date(WTI$DATE)[-1]
WTI <- as.numeric(WTI$WTISPLC)[-1]
# Read the civilian unemployment rate series.
unrate <- read_csv(file = '~/Downloads/UNRATE.csv')
## Parsed with column specification:
## cols(
## DATE = col_date(format = ""),
## UNRATE = col_double()
## )
# BUG FIX: `unrate.date <- ...` inside the data.frame() call leaked a global
# variable and mangled the column name; `=` names the column properly.
unrate.df <- data.frame(
  unrate.date = as.Date(unrate$DATE),
  unrate = as.numeric(unrate$UNRATE))
# Drop the first observation to align with the differenced series; this
# overwrites the unrate tibble with a plain numeric vector.
unrate.date <- as.Date(unrate$DATE)[-1]
unrate <- as.numeric(unrate$UNRATE)[-1]
This is mostly a duplicate of what we did before. However, we need the UMCSENT series to be stationary, and we will also employ a model which combines the predicted values from the ARIMA UMCSENT model with the covariate series.
# Exhaustive (non-stepwise) ARIMA search on the raw UMCSENT series; the
# Box-Cox transformation parameter is chosen automatically.
lm.UMCSENT <- auto.arima(UMCSENT, lambda = 'auto', seasonal = FALSE,
                         stepwise = FALSE)
lm.UMCSENT
## Series: UMCSENT
## ARIMA(2,1,2)
## Box Cox transformation: lambda= 1.999924
##
## Coefficients:
## ar1 ar2 ma1 ma2
## 1.2965 -0.5628 -1.3904 0.5739
## s.e. 0.2898 0.2340 0.2907 0.2619
##
## sigma^2 estimated as 101132: log likelihood=-3488.02
## AIC=6986.05 AICc=6986.17 BIC=7006.98
# Ljung-Box test and residual plots for the fitted model.
checkresiduals(lm.UMCSENT)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(2,1,2)
## Q* = 6.4182, df = 6, p-value = 0.378
##
## Model df: 4. Total lags used: 10
# In-sample fitted values from the ARIMA model.
UMCSENT.fitted <- fitted(lm.UMCSENT)
# NOTE(review): it is unclear why a (2,0,2) model is not selected here, since
# UMCSENT.stab is produced by the transformation recommended by the previous
# fit — a (2,1,2) model with a $\lambda = 2$ Box-Cox transformation.
# Exhaustive ARIMA search on the pre-stabilized series (no lambda argument:
# the stabilizing transformation has already been applied).
lm.UMCSENT.stab <- auto.arima(UMCSENT.stab, seasonal = FALSE, stepwise = FALSE)
lm.UMCSENT.stab
## Series: UMCSENT.stab
## ARIMA(5,0,0) with zero mean
##
## Coefficients:
## ar1 ar2 ar3 ar4 ar5
## -0.1041 -0.1187 -0.0922 -0.0829 -0.1201
## s.e. 0.0451 0.0453 0.0453 0.0453 0.0453
##
## sigma^2 estimated as 101324: log likelihood=-3495.18
## AIC=7002.35 AICc=7002.53 BIC=7027.48
# Residual diagnostics.
checkresiduals(lm.UMCSENT.stab)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(5,0,0) with zero mean
## Q* = 2.2803, df = 5, p-value = 0.8092
##
## Model df: 5. Total lags used: 10
# In-sample fitted values from the stabilized-series model.
UMCSENT.stab.fitted <- fitted(lm.UMCSENT.stab)
ARIMA(0,1,0) model with \(\delta = 0.0112_{(0.0031)}\) drift and \(\lambda = 0.073\) B-C transformation.
# Automatic ARIMA fit for the S&P 500 series (exhaustive search, Box-Cox
# lambda chosen automatically).
lm.SP <- auto.arima(SP, lambda = 'auto', seasonal = FALSE, stepwise = FALSE)
lm.SP
## Series: SP
## ARIMA(0,1,0) with drift
## Box Cox transformation: lambda= 0.07318047
##
## Coefficients:
## drift
## 0.0112
## s.e. 0.0031
##
## sigma^2 estimated as 0.004788: log likelihood=608.92
## AIC=-1213.84 AICc=-1213.81 BIC=-1205.46
# Residual diagnostics.
checkresiduals(lm.SP)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(0,1,0) with drift
## Q* = 9.4986, df = 9, p-value = 0.3926
##
## Model df: 1. Total lags used: 10
None of the transformations provide stationarity. Some high-order residual autocorrelations end up being significant.
# ARIMA fit for DGS1.log.diff3 (differenced log 1-year Treasury rate, built
# in Setup.R), dropping the first three observations; wider search bounds on
# p and q than the defaults.
lm.DGS1.log <- auto.arima(DGS1.log.diff3[-(1:3)], max.p = 10, max.q = 10,
                          lambda = "auto", seasonal = FALSE, stepwise = FALSE)
lm.DGS1.log
## Series: DGS1.log.diff3[-(1:3)]
## ARIMA(1,0,4) with non-zero mean
## Box Cox transformation: lambda= 0.7230363
##
## Coefficients:
## ar1 ma1 ma2 ma3 ma4 mean
## 0.9383 0.2546 -0.0590 -0.6268 -0.2019 -1.3971
## s.e. 0.0359 0.0595 0.0587 0.0536 0.0511 0.0515
##
## sigma^2 estimated as 0.03784: log likelihood=107.5
## AIC=-201 AICc=-200.76 BIC=-171.72
# Residual diagnostics: the small Ljung-Box p-value indicates residual
# autocorrelation remains in this model.
checkresiduals(lm.DGS1.log)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(1,0,4) with non-zero mean
## Q* = 16.289, df = 4, p-value = 0.002655
##
## Model df: 6. Total lags used: 10
ARIMA(1,1,3) model with \(\lambda = 0.278\) B-C transformation.
# Automatic ARIMA fit for the unemployment rate series.
lm.UNRATE <- auto.arima(UNRATE, lambda = "auto", seasonal = FALSE,
                        stepwise = FALSE)
lm.UNRATE
## Series: UNRATE
## ARIMA(1,1,3)
## Box Cox transformation: lambda= 0.2778343
##
## Coefficients:
## ar1 ma1 ma2 ma3
## 0.8965 -0.9659 0.1602 0.0764
## s.e. 0.0341 0.0558 0.0643 0.0515
##
## sigma^2 estimated as 0.00171: log likelihood=860.42
## AIC=-1710.84 AICc=-1710.71 BIC=-1689.91
# Residual diagnostics.
checkresiduals(lm.UNRATE)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(1,1,3)
## Q* = 4.1514, df = 6, p-value = 0.6562
##
## Model df: 4. Total lags used: 10
ARIMA(0,1,1) model with \(\lambda = 0.090\) B-C transformation.
# Automatic ARIMA fit for the WTI crude oil price series.
lm.WTI <- auto.arima(WTI, lambda = "auto", seasonal = FALSE, stepwise = FALSE)
lm.WTI
## Series: WTI
## ARIMA(0,1,1)
## Box Cox transformation: lambda= 0.09025199
##
## Coefficients:
## ma1
## 0.3084
## s.e. 0.0451
##
## sigma^2 estimated as 0.01452: log likelihood=353.94
## AIC=-703.89 AICc=-703.86 BIC=-695.43
# Residual diagnostics.
checkresiduals(lm.WTI)
##
## Ljung-Box test
##
## data: Residuals from ARIMA(0,1,1)
## Q* = 6.8757, df = 9, p-value = 0.6501
##
## Model df: 1. Total lags used: 10
# Stabilized models are denoted by *.stab
# Cross-correlation functions between the stabilized UMCSENT series and each
# stabilized regressor; used to choose candidate lags/leads for the
# regression models below.
ccf(UMCSENT.stab, SP.stab)
ccf(UMCSENT.stab, UNRATE.stab)
ccf(UMCSENT.stab, WTI.stab)
These models are based solely on the regressors, rather than any autoregression.
# Exhaustive best-subset regression over the four candidate covariates.
# NOTE(review): regsubsets() is from the 'leaps' package, which is not
# library()'d in this file — presumably loaded via Setup.R; confirm.
lm.leaps <- regsubsets(UMCSENT.stab ~ SP.stab.lag1 + SP.stab.lag3 +
                         UNRATE.stab.lead2 + WTI.stab.lag1, data = full.train)
summary(lm.leaps)
## Subset selection object
## Call: regsubsets.formula(UMCSENT.stab ~ SP.stab.lag1 + SP.stab.lag3 +
## UNRATE.stab.lead2 + WTI.stab.lag1, data = full.train)
## 4 Variables (and intercept)
## Forced in Forced out
## SP.stab.lag1 FALSE FALSE
## SP.stab.lag3 FALSE FALSE
## UNRATE.stab.lead2 FALSE FALSE
## WTI.stab.lag1 FALSE FALSE
## 1 subsets of each size up to 4
## Selection Algorithm: exhaustive
## SP.stab.lag1 SP.stab.lag3 UNRATE.stab.lead2 WTI.stab.lag1
## 1 ( 1 ) "*" " " " " " "
## 2 ( 1 ) "*" " " " " "*"
## 3 ( 1 ) "*" " " "*" "*"
## 4 ( 1 ) "*" "*" "*" "*"
# Fit the best model of each size, adding terms in the order selected above.
lm.leaps.1 <- lm(UMCSENT.stab ~ SP.stab.lag1)
lm.leaps.2 <- lm(UMCSENT.stab ~ SP.stab.lag1 + WTI.stab.lag1)
lm.leaps.3 <- lm(UMCSENT.stab ~ SP.stab.lag1 + WTI.stab.lag1 + UNRATE.stab.lead2)
lm.leaps.4 <- lm(UMCSENT.stab ~ SP.stab.lag1 + WTI.stab.lag1 + UNRATE.stab.lead2 +
                   SP.stab.lag3)
# Information criteria for the four nested candidates (one value per model).
lm.leaps.AIC <- vapply(list(lm.leaps.1, lm.leaps.2, lm.leaps.3, lm.leaps.4),
                       AIC, numeric(1))
lm.leaps.BIC <- vapply(list(lm.leaps.1, lm.leaps.2, lm.leaps.3, lm.leaps.4),
                       BIC, numeric(1))
# Per-size model summaries (each printed with its captured output).
summary(lm.leaps.1)
##
## Call:
## lm(formula = UMCSENT.stab ~ SP.stab.lag1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1022.84 -194.35 -6.19 204.80 1257.91
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 4.008 14.213 0.282 0.778
## SP.stab.lag1 1428.798 254.193 5.621 3.21e-08 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 313.7 on 485 degrees of freedom
## Multiple R-squared: 0.06116, Adjusted R-squared: 0.05922
## F-statistic: 31.59 on 1 and 485 DF, p-value: 3.209e-08
summary(lm.leaps.2)
##
## Call:
## lm(formula = UMCSENT.stab ~ SP.stab.lag1 + WTI.stab.lag1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1033.53 -196.60 -10.74 195.32 1131.82
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 5.873 14.015 0.419 0.675
## SP.stab.lag1 1508.846 251.331 6.003 3.79e-09 ***
## WTI.stab.lag1 -303.402 77.319 -3.924 9.97e-05 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 309.1 on 484 degrees of freedom
## Multiple R-squared: 0.09011, Adjusted R-squared: 0.08635
## F-statistic: 23.97 on 2 and 484 DF, p-value: 1.19e-10
summary(lm.leaps.3)
##
## Call:
## lm(formula = UMCSENT.stab ~ SP.stab.lag1 + WTI.stab.lag1 + UNRATE.stab.lead2)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1039.21 -195.02 -9.67 200.37 1173.07
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 4.258 13.940 0.305 0.76014
## SP.stab.lag1 1449.734 250.716 5.782 1.32e-08 ***
## WTI.stab.lag1 -297.988 76.857 -3.877 0.00012 ***
## UNRATE.stab.lead2 -7664.850 2861.623 -2.678 0.00765 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 307.2 on 483 degrees of freedom
## Multiple R-squared: 0.1034, Adjusted R-squared: 0.09786
## F-statistic: 18.57 on 3 and 483 DF, p-value: 2.038e-11
summary(lm.leaps.4)
##
## Call:
## lm(formula = UMCSENT.stab ~ SP.stab.lag1 + WTI.stab.lag1 + UNRATE.stab.lead2 +
## SP.stab.lag3)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1024.2 -188.6 -4.0 199.5 1193.9
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 4.09 13.88 0.295 0.768351
## SP.stab.lag1 1422.93 249.89 5.694 2.16e-08 ***
## WTI.stab.lag1 -284.52 76.74 -3.707 0.000234 ***
## UNRATE.stab.lead2 -7838.10 2850.06 -2.750 0.006181 **
## SP.stab.lag3 -570.48 248.55 -2.295 0.022147 *
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 305.8 on 482 degrees of freedom
## Multiple R-squared: 0.1131, Adjusted R-squared: 0.1058
## F-statistic: 15.37 on 4 and 482 DF, p-value: 7.706e-12
# Compare the information criteria across the four candidate models; AIC
# favors the four-variable model, BIC is nearly flat across models 2-4.
cat('\nAIC for 1-4 variable models\n', lm.leaps.AIC)
##
## AIC for 1-4 variable models
## 6984.906 6971.654 6966.473 6963.179
cat('\nBIC for 1-4 variable models\n', lm.leaps.BIC)
##
## BIC for 1-4 variable models
## 6997.471 6988.407 6987.415 6988.309
cat('\nI recommend four-variable model.')
##
## I recommend four-variable model.
# Standard lm diagnostic plots for the selected four-variable model.
plot(lm.leaps.4)
We now consider, using backward selection, some models which incorporate an ARIMA component in the UMCSENT series.
# Dynamic regression for UMCSENT: ARIMA errors plus all four candidate
# covariates as external regressors (the full model for backward selection).
lm.arima.4 <- auto.arima(UMCSENT, lambda = "auto", seasonal = FALSE,
                         stepwise = FALSE,
                         xreg = cbind(SP.stab.lag1, WTI.stab.lag1,
                                      UNRATE.stab.lead2, SP.stab.lag3))
summary(lm.arima.4)
## Series: UMCSENT
## Regression with ARIMA(2,1,2) errors
## Box Cox transformation: lambda= 1.999924
##
## Coefficients:
## ar1 ar2 ma1 ma2 SP.stab.lag1 WTI.stab.lag1
## 1.3726 -0.6418 -1.446 0.6404 436.4256 -250.2379
## s.e. 0.2168 0.2095 0.213 0.2190 190.4385 69.3122
## UNRATE.stab.lead2 SP.stab.lag3
## -1863.622 -181.1047
## s.e. 2036.415 191.4895
##
## sigma^2 estimated as 97930: log likelihood=-3478.18
## AIC=6974.37 AICc=6974.75 BIC=7012.04
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set 0.05914436 3.747685 2.858678 -0.08253374 3.499011 0.9594071
## ACF1
## Training set 0.01426939
cat('\nStandardized regression coefficients:\n')
##
## Standardized regression coefficients:
# Coefficient estimates divided by their standard errors (approximate z
# statistics), used to decide which covariate to drop next.
lm.arima.4.stdcoef <- lm.arima.4$coef / sqrt(diag(lm.arima.4$var.coef))
lm.arima.4.stdcoef
## ar1 ar2 ma1 ma2
## 6.3323053 -3.0635115 -6.7884905 2.9233528
## SP.stab.lag1 WTI.stab.lag1 UNRATE.stab.lead2 SP.stab.lag3
## 2.2916877 -3.6102997 -0.9151484 -0.9457682
# Backward selection, step 1: drop UNRATE.stab.lead2 (smallest |z| among the
# covariates above).
lm.arima.3 <- auto.arima(UMCSENT, lambda = "auto", seasonal = FALSE,
                         stepwise = FALSE,
                         xreg = cbind(SP.stab.lag1, WTI.stab.lag1, SP.stab.lag3))
summary(lm.arima.3)
## Series: UMCSENT
## Regression with ARIMA(2,1,2) errors
## Box Cox transformation: lambda= 1.999924
##
## Coefficients:
## ar1 ar2 ma1 ma2 SP.stab.lag1 WTI.stab.lag1
## 1.3682 -0.6332 -1.4424 0.6336 433.3212 -253.6554
## s.e. 0.2220 0.2132 0.2188 0.2227 190.7592 69.2515
## SP.stab.lag3
## -196.8316
## s.e. 191.1271
##
## sigma^2 estimated as 97895: log likelihood=-3478.6
## AIC=6973.2 AICc=6973.5 BIC=7006.69
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set 0.05976763 3.747525 2.86839 -0.0808148 3.508861 0.9626665
## ACF1
## Training set 0.01731555
cat('\nStandardized regression coefficients:\n')
##
## Standardized regression coefficients:
# Approximate z statistics for the reduced model.
lm.arima.3.stdcoef <- lm.arima.3$coef / sqrt(diag(lm.arima.3$var.coef))
lm.arima.3.stdcoef
## ar1 ar2 ma1 ma2 SP.stab.lag1
## 6.163206 -2.969251 -6.591543 2.845063 2.271561
## WTI.stab.lag1 SP.stab.lag3
## -3.662815 -1.029847
# Backward selection, step 2: drop SP.stab.lag3 (not significant above).
lm.arima.2 <- auto.arima(UMCSENT, lambda = "auto", seasonal = FALSE,
                         stepwise = FALSE,
                         xreg = cbind(SP.stab.lag1, WTI.stab.lag1))
summary(lm.arima.2)
## Series: UMCSENT
## Regression with ARIMA(2,1,2) errors
## Box Cox transformation: lambda= 1.999924
##
## Coefficients:
## ar1 ar2 ma1 ma2 SP.stab.lag1 WTI.stab.lag1
## 1.3628 -0.6231 -1.4444 0.6314 446.7771 -253.7302
## s.e. 0.2251 0.2123 0.2210 0.2209 191.5531 69.5000
##
## sigma^2 estimated as 97899: log likelihood=-3479.12
## AIC=6972.24 AICc=6972.47 BIC=7001.54
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set 0.05994835 3.754125 2.874227 -0.08119768 3.517233 0.9646255
## ACF1
## Training set 0.0155136
cat('\nStandardized regression coefficients:\n')
##
## Standardized regression coefficients:
# Approximate z statistics; both remaining covariates are significant.
lm.arima.2.stdcoef <- lm.arima.2$coef / sqrt(diag(lm.arima.2$var.coef))
lm.arima.2.stdcoef
## ar1 ar2 ma1 ma2 SP.stab.lag1
## 6.054257 -2.934871 -6.536769 2.858561 2.332394
## WTI.stab.lag1
## -3.650794
Backward selection leaves us with the regression model using the lag-1 (differenced) S&P 500 series and the lag-1 (differenced) WTI series as covariates.
# Raw consumer sentiment as a ts object, plotted with a linear trend overlay.
um <- ts(UMCSENT)
autoplot(um) + geom_smooth(method="lm") + labs(x ="Date", y = "", title="Consumer Sentiment")
#lag plots
# Matrix of the stabilized response and the two selected (lagged) regressors.
xreg=cbind(UMCSENT.stab,SP.stab.lag1, WTI.stab.lag1)
colnames(xreg) = c("UMCSENT", "S&P", "WTI")
# lag1.plot() comes from astsa (loaded via Setup.R).
lag1.plot(xreg, 12, col="dodgerblue3") #Figure 3.10
# NOTE(review): the warning below shows UMSCENT and SP have different lengths,
# so cbind() silently recycles values — the series should be aligned to a
# common sample before binding; confirm intended window.
v1 = cbind(UMSCENT,SP, WTI)
## Warning in cbind(UMSCENT, SP, WTI): number of rows of result is not a
## multiple of vector length (arg 2)
# AR/MA root plot for the selected dynamic regression (sourced helper).
autoplot_roots(lm.arima.2)
# NOTE(review): xreg is a matrix, so length(xreg) counts ALL elements
# (rows x 3 columns); nrow(xreg) was probably intended for the log(n) lag
# rule. Changing it would alter the Ljung-Box df below — confirm before fixing.
DFC = round(log(length(xreg)))
checkresiduals(lm.arima.2, lag = DFC + length(lm.arima.2$model$phi))
##
## Ljung-Box test
##
## data: Residuals from Regression with ARIMA(2,1,2) errors
## Q* = 2.7288, df = 3, p-value = 0.4354
##
## Model df: 6. Total lags used: 9
# Pairwise correlations between the stabilized series (complete cases only).
r <- cor(xreg, use = "complete.obs")
round(r, 2)
## UMCSENT S&P WTI
## UMCSENT 1.00 0.25 -0.15
## S&P 0.25 1.00 0.08
## WTI -0.15 0.08 1.00
# Correlation heatmaps: plain, then hierarchically clustered lower triangle
# with printed coefficients.
ggcorrplot(r)
ggcorrplot(r,
           hc.order = TRUE,
           type = "lower",
           lab = TRUE)
library(ggplot2)
library(visreg)
## Warning: package 'visreg' was built under R version 3.6.2
#Conditional plot:
# Partial-effect plots for lm.leaps.2: each panel shows the effect of one
# regressor with the other held fixed (gg = TRUE returns a list of ggplot
# objects, printed below).
visreg(lm.leaps.2, gg = TRUE)
## [[1]]
##
## [[2]]
#Controlling for WTI, UMCSENT increases with S&P in a linear fashion
#Controlling for S&P, UMCSENT decreases with WTI in a linear fashion
library(vars)
## Loading required package: MASS
##
## Attaching package: 'MASS'
## The following object is masked from 'package:alr3':
##
## forbes
## The following object is masked from 'package:dplyr':
##
## select
## Loading required package: strucchange
## Loading required package: sandwich
##
## Attaching package: 'strucchange'
## The following object is masked from 'package:stringr':
##
## boundary
## Loading required package: lmtest
# First-order VAR in (UMSCENT, SP, WTI) — see v1 above; constant included,
# no seasonal or exogenous terms.
Model1 <- VAR(v1, p = 1, type = "const", season = NULL, exog = NULL)
Stability1 <- stability(Model1, type = "OLS-CUSUM")
plot(Stability1)
# The OLS-CUSUM stability test checks for structural breaks: if the cumulative
# sum of recursive residuals crosses the red critical bounds at any point, a
# structural break is indicated there, and an undetected break could throw off
# the whole estimation. The paths stay inside the bounds, so no structural
# break is evident and the model passes this check.
# Impulse-response functions, 20 steps ahead with bootstrap confidence bands.
RRPirf <- irf(Model1, impulse = "UMSCENT", response = "SP", n.ahead = 20, boot = TRUE)
plot(RRPirf, ylab = "S&P", main = "UMSCENTS's shock to S&P 500")
RRPirf2 <- irf(Model1, impulse = "UMSCENT", response = "WTI", n.ahead = 20, boot = TRUE)
plot(RRPirf2, ylab = "WTI", main = "UMSCENTS's shock to WTI")
RRPirf3 <- irf(Model1, impulse = "UMSCENT", response = "UMSCENT", n.ahead = 20, boot = TRUE)
plot(RRPirf3, ylab = "UMSCENT", main = "UMSCENTS's shock to UMSCENT")
RRPirf4 <- irf(Model1, impulse = "SP", response = "UMSCENT", n.ahead = 20, boot = TRUE)
plot(RRPirf4, ylab = "UMSCENT", main = "S&P's shock to UMSCENT")
RRPirf5 <- irf(Model1, impulse = "WTI", response = "UMSCENT", n.ahead = 20, boot = TRUE)
plot(RRPirf5, ylab = "UMSCENT", main = "WTI's shock to UMSCENT")
# 12-month-ahead forecast with 95% intervals. NOTE: this name shadows the
# forecast() generic from the forecast package for the rest of the session.
forecast <- predict(Model1, n.ahead = 12, ci = 0.95)
# BUG FIX: the sentiment column in v1 is named "UMSCENT" (sic), so the
# original names = "UMCSENT" was invalid — fanchart() warned "Invalid
# variable name(s) supplied" and silently fell back to the first variable.
# Use the actual variable name so the chart is drawn deliberately.
fanchart(forecast, names = "UMSCENT", main = "Fanchart for UMCSENT", xlab = "Horizon", ylab = "UMSCENT")
fanchart(forecast, names = "SP", main = "Fanchart for S&P", xlab = "Horizon", ylab = "SP")
fanchart(forecast, names = "WTI", main = "Fanchart for WTI", xlab = "Horizon", ylab = "WTI")